library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(dplyr)
library(fGarch)
## Loading required package: timeDate
## Loading required package: timeSeries
## Loading required package: fBasics
library(dynlm)
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following object is masked from 'package:timeSeries':
##
## time<-
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
library(tseries)
## Registered S3 method overwritten by 'xts':
## method from
## as.zoo.xts zoo
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
library(astsa)
##
## Attaching package: 'astsa'
## The following object is masked from 'package:fBasics':
##
## nyse
library(xts)
##
## Attaching package: 'xts'
## The following objects are masked from 'package:dplyr':
##
## first, last
library(fpp)
## Loading required package: forecast
##
## Attaching package: 'forecast'
## The following object is masked from 'package:astsa':
##
## gas
## Loading required package: fma
##
## Attaching package: 'fma'
## The following objects are masked from 'package:astsa':
##
## chicken, sales
## Loading required package: expsmooth
## Loading required package: lmtest
##
## Attaching package: 'fpp'
## The following object is masked from 'package:astsa':
##
## oil
library(TSA)
## Registered S3 methods overwritten by 'TSA':
## method from
## fitted.Arima forecast
## plot.Arima forecast
##
## Attaching package: 'TSA'
## The following objects are masked from 'package:timeDate':
##
## kurtosis, skewness
## The following objects are masked from 'package:stats':
##
## acf, arima
## The following object is masked from 'package:utils':
##
## tar
library(forecast)
library(dplR)
library(tseries)
# Read the IMF IFS CSV export and give the columns stable, readable names.
ifs <- read.csv("IFS_04-02-2020 18-49-44-40.csv", stringsAsFactors = FALSE)
colnames(ifs) <- c("Country_Name", "Country_Code", "Indicator_Name",
                   "Indicator_Code", "Time_Period", "Value", "Status")
# Drop the Status column (column 7), keeping the first six
ifs <- ifs[, 1:6]
# dataset preview
head(ifs)
## Country_Name Country_Code
## 1 United States 111
## 2 United States 111
## 3 United States 111
## 4 United States 111
## 5 United States 111
## 6 United States 111
## Indicator_Name
## 1 International Reserves, Official Reserve Assets, US Dollars
## 2 International Reserves, Official Reserve Assets, US Dollars
## 3 International Reserves, Official Reserve Assets, US Dollars
## 4 International Reserves, Official Reserve Assets, US Dollars
## 5 International Reserves, Official Reserve Assets, US Dollars
## 6 International Reserves, Official Reserve Assets, US Dollars
## Indicator_Code Time_Period Value
## 1 RAFA_USD 1980M5 21917557876
## 2 RAFA_USD 1980M9 22994402034
## 3 RAFA_USD 1980M11 25673025848
## 4 RAFA_USD 1981M4 29692779278
## 5 RAFA_USD 1981M9 29715202935
## 6 RAFA_USD 1982M4 31555856317
print(colnames(ifs))
## [1] "Country_Name" "Country_Code" "Indicator_Name" "Indicator_Code"
## [5] "Time_Period" "Value"
## Select country: monthly employment series for the United States
df1_us <- subset(ifs, (Indicator_Code == "LE_PE_NUM") & (Country_Name == "United States"))
## Rearrange time: "1980M5" -> "1980/5/01" -> Date
df1_us$Time_Period <- gsub("M", "/", df1_us$Time_Period)
df1_us["date"] <- "/01"
df1_us$Time_Period <- paste0(df1_us$Time_Period, df1_us$date)
df1_us$Time_Period <- as.Date(df1_us$Time_Period, "%Y/%m/%d")
df1_us <- df1_us[order(df1_us$Time_Period), ]
df1_us <- subset(df1_us, select = -c(date))
rownames(df1_us) <- NULL
# Monthly ts; start = c(1980, 1) assumes the series begins at 1980M1 -- TODO confirm
df11 <- ts(df1_us$Value, frequency = 12, start = c(1980, 1))
plot(df11, main = "United States: Employment, Persons, Number of")
## 1. Decompose Analysis
# Classical decomposition into trend / seasonal / remainder
plot(decompose(df11))
## There is an obvious growing trend with a small drop in the 2008-2010 period in the data
## There is seasonality in the data
sp <- spectrum(df11, log = "no")
# Predominant frequency. NOTE: the list component is "freq"; the original "sp$fre"
# only worked via partial matching.
sp$freq[which.max(sp$spec)] # 0.025
## [1] 0.025
1/sp$freq[which.max(sp$spec)] # cycle length: 40 months
## [1] 40
max(sp$spec) # estimate of spectral density at the predominant period, about 1.344801e+14
## [1] 1.344801e+14
# Chi-squared quantiles (2 df) for a 95% CI on the spectral density.
# Despite the names, U is the lower-tail quantile (used for the upper bound).
U <- qchisq(0.025, 2)
L <- qchisq(0.975, 2)
2 * max(sp$spec) / L # lower bound for spectral density
## [1] 3.645555e+13
2 * max(sp$spec) / U # upper bound for spectral density
## [1] 5.311681e+15
# The 95% confidence interval is [3.645555e+13, 5.311681e+15]
df11.diff <- diff(df11) # first order difference, d = 1
plot(df11.diff, main = " diff data") # title fixed: plain difference, not diff(log)
acf(df11.diff, lag.max = 100) ## q = 1
pacf(df11.diff) # p = 1, 2
# Candidate non-seasonal ARIMA models with drift
arima111 <- Arima(df11, order = c(1, 1, 1), include.drift = TRUE)
arima211 <- Arima(df11, order = c(2, 1, 1), include.drift = TRUE)
AIC(arima111)
## [1] 14295.02
AIC(arima211)
## [1] 14274.71
BIC(arima111)
## [1] 14311.69
BIC(arima211)
## [1] 14295.55
# Run the automatic search once and reuse the fit (the original refit the
# same model three times for the print, AIC and BIC); FALSE, not the reassignable F.
fit_ns_us <- auto.arima(df11, seasonal = FALSE)
fit_ns_us
## Series: df11
## ARIMA(2,1,2) with drift
##
## Coefficients:
## ar1 ar2 ma1 ma2 drift
## -0.0091 -0.9785 0.1329 0.9694 125233.35
## s.e. 0.0107 0.0099 0.0135 0.0152 33063.04
##
## sigma^2 estimated as 4.713e+11: log likelihood=-7086.8
## AIC=14185.61 AICc=14185.79 BIC=14210.61
AIC(fit_ns_us)
## [1] 14185.61
BIC(fit_ns_us)
## [1] 14210.61
# Seasonal ARIMA search; trace prints each candidate (TRUE, not the reassignable T)
fit1 <- auto.arima(df11, trace = TRUE) # Best model: ARIMA(3,0,1)(0,1,2)[12] with drift
##
## Fitting models using approximations to speed things up...
##
## ARIMA(2,0,2)(1,1,1)[12] with drift : 12979.15
## ARIMA(0,0,0)(0,1,0)[12] with drift : 14381.27
## ARIMA(1,0,0)(1,1,0)[12] with drift : 13067.65
## ARIMA(0,0,1)(0,1,1)[12] with drift : 13870.24
## ARIMA(0,0,0)(0,1,0)[12] : 14628.2
## ARIMA(2,0,2)(0,1,1)[12] with drift : 12968.43
## ARIMA(2,0,2)(0,1,0)[12] with drift : 13142.5
## ARIMA(2,0,2)(0,1,2)[12] with drift : 12969.52
## ARIMA(2,0,2)(1,1,0)[12] with drift : 13067
## ARIMA(2,0,2)(1,1,2)[12] with drift : 12969.35
## ARIMA(1,0,2)(0,1,1)[12] with drift : 12985.5
## ARIMA(2,0,1)(0,1,1)[12] with drift : 12995.95
## ARIMA(3,0,2)(0,1,1)[12] with drift : 12966.81
## ARIMA(3,0,2)(0,1,0)[12] with drift : 13132.82
## ARIMA(3,0,2)(1,1,1)[12] with drift : 12989.08
## ARIMA(3,0,2)(0,1,2)[12] with drift : 12966.32
## ARIMA(3,0,2)(1,1,2)[12] with drift : 12973.2
## ARIMA(3,0,1)(0,1,2)[12] with drift : 12965.76
## ARIMA(3,0,1)(0,1,1)[12] with drift : 12965.83
## ARIMA(3,0,1)(1,1,2)[12] with drift : 12970.98
## ARIMA(3,0,1)(1,1,1)[12] with drift : 12987.04
## ARIMA(2,0,1)(0,1,2)[12] with drift : 12993.09
## ARIMA(3,0,0)(0,1,2)[12] with drift : 12980.94
## ARIMA(4,0,1)(0,1,2)[12] with drift : 12968.13
## ARIMA(2,0,0)(0,1,2)[12] with drift : 12989.24
## ARIMA(4,0,0)(0,1,2)[12] with drift : 12972.65
## ARIMA(4,0,2)(0,1,2)[12] with drift : 12969.77
## ARIMA(3,0,1)(0,1,2)[12] : Inf
##
## Now re-fitting the best model(s) without approximations...
##
## ARIMA(3,0,1)(0,1,2)[12] with drift : 13322.43
##
## Best model: ARIMA(3,0,1)(0,1,2)[12] with drift
AIC(fit1)
## [1] 13322.11
# Refit the chosen seasonal model via astsa, with the orders as named arguments
sarima(df11, p = 3, d = 0, q = 1, P = 0, D = 1, Q = 2, S = 12)
## initial value 14.386342
## iter 2 value 13.562982
## iter 3 value 13.502828
## iter 4 value 13.324031
## iter 5 value 13.296946
## iter 6 value 13.014887
## iter 7 value 12.920372
## iter 8 value 12.898252
## iter 9 value 12.898007
## iter 10 value 12.888989
## iter 11 value 12.888153
## iter 12 value 12.887149
## iter 13 value 12.886341
## iter 14 value 12.884883
## iter 15 value 12.881125
## iter 16 value 12.869233
## iter 17 value 12.866209
## iter 18 value 12.865978
## iter 19 value 12.857886
## iter 20 value 12.857396
## iter 21 value 12.856502
## iter 22 value 12.856148
## iter 23 value 12.854197
## iter 24 value 12.853615
## iter 25 value 12.853177
## iter 26 value 12.853088
## iter 27 value 12.853067
## iter 28 value 12.853035
## iter 29 value 12.852932
## iter 30 value 12.852819
## iter 31 value 12.852704
## iter 32 value 12.852627
## iter 33 value 12.852626
## iter 34 value 12.852623
## iter 35 value 12.852621
## iter 36 value 12.852618
## iter 37 value 12.852615
## iter 38 value 12.852614
## iter 39 value 12.852613
## iter 40 value 12.852612
## iter 41 value 12.852612
## iter 41 value 12.852612
## iter 41 value 12.852612
## final value 12.852612
## converged
## initial value 12.862009
## iter 2 value 12.861765
## iter 3 value 12.861551
## iter 4 value 12.861413
## iter 5 value 12.860991
## iter 6 value 12.860384
## iter 7 value 12.859646
## iter 8 value 12.859216
## iter 9 value 12.858946
## iter 10 value 12.858638
## iter 11 value 12.858282
## iter 12 value 12.858117
## iter 13 value 12.858050
## iter 14 value 12.858042
## iter 15 value 12.858038
## iter 16 value 12.858034
## iter 17 value 12.858032
## iter 18 value 12.858027
## iter 19 value 12.858024
## iter 20 value 12.858015
## iter 21 value 12.858013
## iter 22 value 12.858009
## iter 23 value 12.858009
## iter 23 value 12.858008
## iter 23 value 12.858008
## final value 12.858008
## converged
## $fit
##
## Call:
## stats::arima(x = xdata, order = c(p, d, q), seasonal = list(order = c(P, D,
## Q), period = S), xreg = constant, transform.pars = trans, fixed = fixed,
## optim.control = list(trace = trc, REPORT = 1, reltol = tol))
##
## Coefficients:
## ar1 ar2 ar3 ma1 sma1 sma2 constant
## 1.8449 -0.7614 -0.0864 -0.8606 -0.7253 -0.0351 119339.19
## s.e. 0.0763 0.1237 0.0523 0.0617 0.0521 0.0524 17725.74
##
## sigma^2 estimated as 1.434e+11: log likelihood = -6653.06, aic = 13322.11
##
## $degrees_of_freedom
## [1] 459
##
## $ttable
## Estimate SE t.value p.value
## ar1 1.8449 0.0763 24.1908 0.0000
## ar2 -0.7614 0.1237 -6.1545 0.0000
## ar3 -0.0864 0.0523 -1.6512 0.0994
## ma1 -0.8606 0.0617 -13.9417 0.0000
## sma1 -0.7253 0.0521 -13.9341 0.0000
## sma2 -0.0351 0.0524 -0.6696 0.5034
## constant 119339.1945 17725.7407 6.7325 0.0000
##
## $AIC
## [1] 27.92896
##
## $AICc
## [1] 27.92946
##
## $BIC
## [1] 27.99847
# The model is a good fit according to the diagnostic plots.
pred1 <- forecast(fit1, h = 48)
pred1 # The last 2 columns are the 95% prediction interval lower and upper bounds
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## Nov 2019 158514903 158025460 159004347 157766364 159263443
## Dec 2019 158023944 157337176 158710713 156973622 159074266
## Jan 2020 156850068 155990978 157709158 155536203 158163933
## Feb 2020 157824566 156808208 158840924 156270181 159378951
## Mar 2020 158367594 157203612 159531577 156587437 160147752
## Apr 2020 158860109 157555781 160164437 156865312 160854907
## May 2020 159227543 157788910 160666176 157027344 161427742
## Jun 2020 159634382 158066804 161201960 157236978 162031786
## Jul 2020 159924387 158232835 161615939 157337382 162511393
## Aug 2020 159456801 157646020 161267582 156687451 162226152
## Sep 2020 159763796 157838395 161689196 156819150 162708441
## Oct 2020 160076479 158040986 162111972 156963461 163189497
## Nov 2020 160028517 157841956 162215078 156684460 163372574
## Dec 2020 159505235 157177486 161832984 155945250 163065220
## Jan 2021 158315168 155851260 160779076 154546945 162083390
## Feb 2021 159248687 156654019 161843356 155280484 163216891
## Mar 2021 159774908 157054811 162495005 155614878 163934937
## Apr 2021 160253584 157413366 163093802 155909846 164597322
## May 2021 160608140 157653078 163563202 156088762 165127517
## Jun 2021 160998838 157934168 164063507 156311830 165685846
## Jul 2021 161267651 158098561 164436740 156420947 166114355
## Aug 2021 160727268 157458886 163995650 155728709 165725827
## Sep 2021 161012291 157649672 164374910 155869609 166154974
## Oct 2021 161312380 157860494 164764265 156033177 166591582
## Nov 2021 161246675 157683095 164810254 155796650 166696700
## Dec 2021 160707885 157039634 164376137 155097779 166317992
## Jan 2022 159503806 155735237 163272375 153740277 165267335
## Feb 2022 160424816 156560570 164289062 154514962 166334671
## Mar 2022 160939966 156984674 164895257 154890870 166989062
## Apr 2022 161408954 157367238 165450670 155227682 167590225
## May 2022 161755145 157631589 165878702 155448710 168061581
## Jun 2022 162138746 157937875 166339617 155714068 168563423
## Jul 2022 162401670 158127936 166675404 155865557 168937782
## Aug 2022 161856550 157514312 166198787 155215671 168497428
## Sep 2022 162137930 157731445 166544415 155398792 168877068
## Oct 2022 162435413 157968819 166902008 155604346 169266481
## Nov 2022 162368086 157825503 166910669 155420805 169315367
## Dec 2022 161828601 157215294 166441908 154773157 168884045
## Jan 2023 160624699 155943987 165305411 153466168 167783230
## Feb 2023 161546706 156802100 166291312 154290457 168802955
## Mar 2023 162063620 157258606 166868633 154714985 169412254
## Apr 2023 162535088 157673125 167397051 155099357 169970818
## May 2023 162884426 157968926 167799927 155366817 170402036
## Jun 2023 163271791 158306100 168237482 155677422 170866160
## Jul 2023 163539049 158526438 168551660 155872922 171205176
## Aug 2023 162998788 157942438 168055138 155265768 170731808
## Sep 2023 163285506 158188500 168382513 155490307 171080705
## Oct 2023 163588764 158454075 168723453 155735935 171441593
autoplot(pred1) # plot with confidence interval
# Fit an ARIMA model without a seasonal term
fit2 <- auto.arima(df11, seasonal = FALSE, trace = TRUE) # ARIMA(2,1,2)
##
## Fitting models using approximations to speed things up...
##
## ARIMA(2,1,2) with drift : 14160.76
## ARIMA(0,1,0) with drift : 14289.43
## ARIMA(1,1,0) with drift : 14269.58
## ARIMA(0,1,1) with drift : 14265.89
## ARIMA(0,1,0) : 14299.42
## ARIMA(1,1,2) with drift : 14257.35
## ARIMA(2,1,1) with drift : 14249.3
## ARIMA(3,1,2) with drift : Inf
## ARIMA(2,1,3) with drift : 14217.65
## ARIMA(1,1,1) with drift : 14268.86
## ARIMA(1,1,3) with drift : 14214.89
## ARIMA(3,1,1) with drift : Inf
## ARIMA(3,1,3) with drift : 14164.02
## ARIMA(2,1,2) : 14170.95
##
## Now re-fitting the best model(s) without approximations...
##
## ARIMA(2,1,2) with drift : 14185.79
##
## Best model: ARIMA(2,1,2) with drift
# Take the log transformation and first order difference
df11.difflog <- diff(log(df11))
plot(df11.difflog, main = " difflog data")
acf(df11.difflog, lag.max = 20) # from the acf plot we can observe the seasonality
pacf(df11.difflog, lag.max = 20) # from the pacf plot, AR(1)
acf2(df11^2) # according to the pacf plot, ARCH(1) model
## ACF PACF
## [1,] 0.99 0.99
## [2,] 0.98 -0.03
## [3,] 0.98 0.01
## [4,] 0.97 0.02
## [5,] 0.96 -0.01
## [6,] 0.95 0.03
## [7,] 0.95 0.06
## [8,] 0.94 0.01
## [9,] 0.93 -0.02
## [10,] 0.93 0.05
## [11,] 0.92 -0.03
## [12,] 0.91 -0.04
## [13,] 0.91 -0.12
## [14,] 0.90 -0.02
## [15,] 0.89 0.02
## [16,] 0.88 0.00
## [17,] 0.87 0.00
## [18,] 0.86 0.02
## [19,] 0.86 0.06
## [20,] 0.85 0.01
## [21,] 0.84 -0.02
## [22,] 0.84 0.05
## [23,] 0.83 -0.01
## [24,] 0.82 -0.04
## [25,] 0.82 -0.10
## [26,] 0.81 -0.03
## [27,] 0.80 0.02
## [28,] 0.79 0.01
## [29,] 0.78 0.00
## [30,] 0.77 0.01
## [31,] 0.76 0.03
## [32,] 0.76 0.02
## [33,] 0.75 0.00
## [34,] 0.75 0.03
## [35,] 0.74 -0.01
## [36,] 0.73 -0.02
## [37,] 0.72 -0.09
## [38,] 0.71 -0.02
## [39,] 0.71 0.00
## [40,] 0.70 0.02
## [41,] 0.69 -0.01
## [42,] 0.68 0.02
## [43,] 0.67 0.04
## [44,] 0.67 0.01
## [45,] 0.66 0.00
## [46,] 0.66 0.03
## [47,] 0.65 0.00
## [48,] 0.64 0.00
# Fit an ARIMA(2,1,2) model, then plot the residuals' ACF and PACF
arima212 <- arima(df11, order = c(2, 1, 2))
residual_arima212 <- residuals(arima212) # accessor instead of $residuals
acf2(residual_arima212)
## ACF PACF
## [1,] 0.17 0.17
## [2,] 0.22 0.20
## [3,] -0.14 -0.22
## [4,] -0.16 -0.16
## [5,] -0.23 -0.11
## [6,] -0.24 -0.18
## [7,] -0.16 -0.10
## [8,] -0.19 -0.18
## [9,] -0.21 -0.31
## [10,] 0.24 0.29
## [11,] 0.26 0.23
## [12,] 0.70 0.59
## [13,] 0.17 0.04
## [14,] 0.22 -0.01
## [15,] -0.17 -0.14
## [16,] -0.20 -0.08
## [17,] -0.25 -0.08
## [18,] -0.20 0.00
## [19,] -0.21 -0.04
## [20,] -0.21 -0.01
## [21,] -0.22 -0.15
## [22,] 0.22 -0.04
## [23,] 0.23 -0.06
## [24,] 0.63 0.20
## [25,] 0.18 0.02
## [26,] 0.22 0.08
## [27,] -0.22 -0.11
## [28,] -0.18 0.00
## [29,] -0.23 0.01
## [30,] -0.21 -0.04
## [31,] -0.22 0.00
## [32,] -0.21 0.02
## [33,] -0.22 -0.08
## [34,] 0.22 0.03
## [35,] 0.21 -0.06
## [36,] 0.61 0.13
## [37,] 0.14 -0.09
## [38,] 0.20 -0.02
## [39,] -0.22 -0.07
## [40,] -0.20 -0.05
## [41,] -0.21 0.04
## [42,] -0.20 0.01
## [43,] -0.22 -0.01
## [44,] -0.22 -0.04
## [45,] -0.20 -0.01
## [46,] 0.24 0.07
## [47,] 0.19 -0.02
## [48,] 0.59 0.05
# Squared residuals: check for remaining ARCH effects
acf2(residual_arima212^2)
## ACF PACF
## [1,] 0.02 0.02
## [2,] -0.07 -0.07
## [3,] -0.08 -0.08
## [4,] 0.01 0.01
## [5,] -0.01 -0.02
## [6,] -0.09 -0.10
## [7,] -0.01 -0.01
## [8,] 0.04 0.02
## [9,] -0.07 -0.09
## [10,] -0.12 -0.12
## [11,] 0.11 0.10
## [12,] 0.39 0.37
## [13,] 0.06 0.06
## [14,] -0.11 -0.07
## [15,] -0.06 -0.02
## [16,] 0.02 0.02
## [17,] -0.01 0.00
## [18,] -0.09 -0.04
## [19,] -0.01 -0.02
## [20,] 0.01 -0.03
## [21,] -0.07 -0.03
## [22,] -0.10 -0.02
## [23,] 0.03 -0.06
## [24,] 0.38 0.24
## [25,] 0.05 0.04
## [26,] -0.08 -0.01
## [27,] -0.09 -0.05
## [28,] 0.00 -0.03
## [29,] 0.01 0.03
## [30,] -0.08 -0.01
## [31,] 0.00 0.01
## [32,] 0.02 -0.01
## [33,] -0.08 -0.04
## [34,] -0.09 0.01
## [35,] 0.03 -0.03
## [36,] 0.42 0.24
## [37,] 0.00 -0.06
## [38,] -0.11 -0.05
## [39,] -0.08 -0.01
## [40,] 0.04 0.04
## [41,] -0.06 -0.09
## [42,] -0.05 0.02
## [43,] 0.00 0.02
## [44,] 0.03 0.00
## [45,] -0.05 0.02
## [46,] -0.04 0.08
## [47,] 0.08 0.02
## [48,] 0.32 0.07
# GARCH(3,3) on the ARIMA(2,1,2) residuals; trace suppressed (FALSE, not the reassignable F)
garch_us1 <- garch(residual_arima212, order = c(3, 3), trace = FALSE)
## Warning in garch(residual_arima212, order = c(3, 3), trace = FALSE): singular
## information
summary(garch_us1)
##
## Call:
## garch(x = residual_arima212, order = c(3, 3), trace = F)
##
## Model:
## GARCH(3,3)
##
## Residuals:
## Min 1Q Median 3Q Max
## -3.3137 -0.4171 0.1818 0.7922 2.9313
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 3.263e+11 NA NA NA
## a1 5.171e-02 NA NA NA
## a2 4.897e-03 NA NA NA
## a3 3.661e-03 NA NA NA
## b1 7.308e-02 NA NA NA
## b2 7.493e-02 NA NA NA
## b3 7.711e-02 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 4.6405, df = 2, p-value = 0.09825
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 0.33624, df = 1, p-value = 0.562
AIC(garch_us1)
## [1] 14142.81
# tseries::garch order = c(GARCH order, ARCH order), so c(0, 3) is a pure ARCH(3)
# (the summary below estimates only a0..a3, consistent with that reading)
garch_us2 <- garch(residual_arima212, order = c(0, 3), trace = FALSE)
## Warning in garch(residual_arima212, order = c(0, 3), trace = FALSE): singular
## information
summary(garch_us2)
##
## Call:
## garch(x = residual_arima212, order = c(0, 3), trace = F)
##
## Model:
## GARCH(0,3)
##
## Residuals:
## Min 1Q Median 3Q Max
## -3.4527 -0.4341 0.1904 0.8166 3.0020
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 3.962e+11 NA NA NA
## a1 6.243e-02 NA NA NA
## a2 1.700e-03 NA NA NA
## a3 9.513e-04 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 4.9537, df = 2, p-value = 0.08401
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 0.71223, df = 1, p-value = 0.3987
AIC(garch_us2) # this AIC is smaller than garch_us1's, so the ARCH(3) model is better
## [1] 14139.16
## Select country: monthly employment series for the United Kingdom
df1_uk <- subset(ifs, (Indicator_Code == "LE_PE_NUM") & (Country_Name == "United Kingdom"))
# Rearrange time: "1992M4" -> "1992/4/01" -> Date
df1_uk$Time_Period <- gsub("M", "/", df1_uk$Time_Period)
df1_uk["date"] <- "/01"
df1_uk$Time_Period <- paste0(df1_uk$Time_Period, df1_uk$date)
df1_uk$Time_Period <- as.Date(df1_uk$Time_Period, "%Y/%m/%d")
df1_uk <- df1_uk[order(df1_uk$Time_Period), ]
df1_uk <- subset(df1_uk, select = -c(date))
rownames(df1_uk) <- NULL
# Monthly ts; start = c(1992, 4) assumes the series begins at 1992M4 -- TODO confirm
df22 <- ts(df1_uk$Value, frequency = 12, start = c(1992, 4))
plot(df22, main = "United Kingdom: Employment, Persons, Number of")
plot(decompose(df22))
## There is an obvious growing trend with a small drop in the 2008-2010 period in the data
## There is seasonality in the data
sp <- spectrum(df22, log = "no")
# Find the predominant frequency ("freq"; the original "fre" relied on partial matching)
sp$freq[which.max(sp$spec)] # 0.066
## [1] 0.06666667
# Find the cycle of the predominant frequency
1/sp$freq[which.max(sp$spec)] # 15 months
## [1] 15
max(sp$spec) # estimate of spectral density at predominant period, close to 899257074378
## [1] 899257074378
# Chi-squared quantiles (2 df); U is the lower-tail quantile, used for the upper bound
U <- qchisq(0.025, 2)
L <- qchisq(0.975, 2)
2 * max(sp$spec) / L # lower bound for spectral density
## [1] 243775131599
2 * max(sp$spec) / U # upper bound for spectral density
## [1] 3.551876e+13
# The 95% confidence interval is [243775131599, 3.551876e+13]
df22.diff <- diff(df22)
plot(df22.diff)
acf2(df22.diff) # p = 4, 5; q = 1, 2
## ACF PACF
## [1,] 0.45 0.45
## [2,] 0.10 -0.13
## [3,] 0.00 0.02
## [4,] 0.05 0.07
## [5,] -0.27 -0.42
## [6,] -0.45 -0.20
## [7,] -0.25 0.09
## [8,] 0.04 0.12
## [9,] 0.00 -0.07
## [10,] 0.09 0.19
## [11,] 0.30 0.14
## [12,] 0.48 0.17
## [13,] 0.28 0.04
## [14,] 0.00 -0.14
## [15,] -0.03 0.02
## [16,] 0.01 0.10
## [17,] -0.23 -0.13
## [18,] -0.41 -0.12
## [19,] -0.28 -0.02
## [20,] 0.01 0.05
## [21,] -0.04 -0.16
## [22,] 0.01 0.11
## [23,] 0.29 0.19
## [24,] 0.50 0.08
## [25,] 0.29 0.04
## [26,] 0.05 0.00
## [27,] 0.09 0.11
## [28,] 0.03 -0.07
## [29,] -0.26 -0.05
## [30,] -0.44 -0.10
## [31,] -0.26 0.02
## [32,] -0.05 -0.02
## [33,] -0.05 -0.07
## [34,] 0.00 0.03
## [35,] 0.28 0.06
## [36,] 0.43 0.01
## [37,] 0.27 0.03
## [38,] 0.04 0.01
## [39,] 0.06 0.03
## [40,] 0.01 -0.03
## [41,] -0.29 -0.09
## [42,] -0.46 -0.04
## [43,] -0.28 -0.02
## [44,] -0.05 0.03
## [45,] -0.04 0.00
## [46,] -0.02 -0.01
## [47,] 0.26 0.01
## [48,] 0.41 0.01
# Candidate ARIMA models with drift (TRUE, not the reassignable T)
arima411 <- Arima(df22, order = c(4, 1, 1), include.drift = TRUE)
arima511 <- Arima(df22, order = c(5, 1, 1), include.drift = TRUE)
arima412 <- Arima(df22, order = c(4, 1, 2), include.drift = TRUE)
arima512 <- Arima(df22, order = c(5, 1, 2), include.drift = TRUE)
AIC(arima411)
## [1] 8035.022
AIC(arima511)
## [1] 7979.001
AIC(arima412)
## [1] 8012.329
AIC(arima512)
## [1] 7954.431
BIC(arima411)
## [1] 8061.509
BIC(arima511)
## [1] 8009.272
BIC(arima412)
## [1] 8042.599
BIC(arima512)
## [1] 7988.485
auto.arima(df22, seasonal = FALSE)
## Series: df22
## ARIMA(5,1,1) with drift
##
## Coefficients:
## ar1 ar2 ar3 ar4 ar5 ma1 drift
## 0.7583 -0.2536 -0.0473 0.2995 -0.4508 -0.2670 22452.337
## s.e. 0.0835 0.0718 0.0652 0.0631 0.0500 0.0853 2975.167
##
## sigma^2 estimated as 2.608e+09: log likelihood=-3981.5
## AIC=7979 AICc=7979.46 BIC=8009.27
## arima512 has a lower AIC and BIC than the non-seasonal auto.arima choice.
fit22 <- auto.arima(df22) # Best model: ARIMA(2,1,3)(2,1,1)[12]
# Auto-print the fitted seasonal model
fit22
## Series: df22
## ARIMA(2,1,3)(2,1,1)[12]
##
## Coefficients:
## ar1 ar2 ma1 ma2 ma3 sar1 sar2 sma1
## 0.0434 0.8824 0.2511 -0.7471 -0.2454 -0.1288 -0.0045 -0.8871
## s.e. 0.0791 0.0754 0.0932 0.0862 0.0553 0.0721 0.0681 0.0483
##
## sigma^2 estimated as 1.981e+09: log likelihood=-3798.04
## AIC=7614.08 AICc=7614.67 BIC=7647.79
# Refit the chosen seasonal model via astsa with named order arguments
sarima(df22, p = 2, d = 1, q = 3, P = 2, D = 1, Q = 1, S = 12) # not a very good fit
## initial value 11.084866
## iter 2 value 10.871398
## iter 3 value 10.773249
## iter 4 value 10.758966
## iter 5 value 10.739943
## iter 6 value 10.730917
## iter 7 value 10.727753
## iter 8 value 10.722921
## iter 9 value 10.721991
## iter 10 value 10.720711
## iter 11 value 10.716986
## iter 12 value 10.714496
## iter 13 value 10.714321
## iter 14 value 10.714122
## iter 15 value 10.714107
## iter 16 value 10.714095
## iter 17 value 10.714094
## iter 18 value 10.714093
## iter 18 value 10.714093
## final value 10.714093
## converged
## initial value 10.725882
## iter 2 value 10.724655
## iter 3 value 10.724042
## iter 4 value 10.723563
## iter 5 value 10.723465
## iter 6 value 10.723337
## iter 7 value 10.722543
## iter 8 value 10.721962
## iter 9 value 10.720987
## iter 10 value 10.719763
## iter 11 value 10.718355
## iter 12 value 10.717664
## iter 13 value 10.717299
## iter 14 value 10.717109
## iter 15 value 10.716965
## iter 16 value 10.716894
## iter 17 value 10.716859
## iter 18 value 10.716723
## iter 19 value 10.716685
## iter 20 value 10.716650
## iter 21 value 10.716639
## iter 22 value 10.716518
## iter 23 value 10.716410
## iter 24 value 10.716162
## iter 25 value 10.715816
## iter 26 value 10.715631
## iter 27 value 10.715531
## iter 28 value 10.715509
## iter 29 value 10.715494
## iter 30 value 10.715476
## iter 31 value 10.715447
## iter 32 value 10.715404
## iter 33 value 10.715371
## iter 34 value 10.715368
## iter 35 value 10.715367
## iter 35 value 10.715367
## final value 10.715367
## converged
## $fit
##
## Call:
## stats::arima(x = xdata, order = c(p, d, q), seasonal = list(order = c(P, D,
## Q), period = S), include.mean = !no.constant, transform.pars = trans, fixed = fixed,
## optim.control = list(trace = trc, REPORT = 1, reltol = tol))
##
## Coefficients:
## ar1 ar2 ma1 ma2 ma3 sar1 sar2 sma1
## 0.0434 0.8824 0.2511 -0.7471 -0.2454 -0.1288 -0.0045 -0.8871
## s.e. 0.0791 0.0754 0.0932 0.0862 0.0553 0.0721 0.0681 0.0483
##
## sigma^2 estimated as 1.903e+09: log likelihood = -3798.04, aic = 7614.08
##
## $degrees_of_freedom
## [1] 305
##
## $ttable
## Estimate SE t.value p.value
## ar1 0.0434 0.0791 0.5489 0.5835
## ar2 0.8824 0.0754 11.7013 0.0000
## ma1 0.2511 0.0932 2.6946 0.0074
## ma2 -0.7471 0.0862 -8.6637 0.0000
## ma3 -0.2454 0.0553 -4.4386 0.0000
## sar1 -0.1288 0.0721 -1.7859 0.0751
## sar2 -0.0045 0.0681 -0.0657 0.9476
## sma1 -0.8871 0.0483 -18.3607 0.0000
##
## $AIC
## [1] 23.50023
##
## $AICc
## [1] 23.50164
##
## $BIC
## [1] 23.60429
# 48-month-ahead forecast from the seasonal model
pred22 <- forecast(fit22, h = 48)
pred22
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## Jun 2019 32864165 32807113 32921218 32776911 32951420
## Jul 2019 32983157 32889832 33076482 32840429 33125885
## Aug 2019 33021524 32897095 33145954 32831225 33211823
## Sep 2019 33024187 32874342 33174031 32795020 33253354
## Oct 2019 33051291 32875983 33226599 32783181 33319402
## Nov 2019 33087272 32889119 33285425 32784223 33390321
## Dec 2019 33061521 32839992 33283050 32722721 33400321
## Jan 2020 33025561 32782279 33268842 32653494 33397627
## Feb 2020 33032692 32767117 33298267 32626530 33438854
## Mar 2020 33070450 32783719 33357182 32631932 33508968
## Apr 2020 33099202 32790852 33407551 32627621 33570782
## May 2020 33135923 32806808 33465037 32632585 33639260
## Jun 2020 33232482 32882538 33582427 32697289 33767676
## Jul 2020 33347528 32977479 33717576 32781587 33913469
## Aug 2020 33379790 32989372 33770209 32782696 33976885
## Sep 2020 33384722 32974450 33794994 32757265 34012179
## Oct 2020 33414795 32984476 33845114 32756679 34072911
## Nov 2020 33454453 33004519 33904388 32766338 34142569
## Dec 2020 33438402 32968719 33908085 32720084 34156720
## Jan 2021 33396378 32907314 33885441 32648420 34144336
## Feb 2021 33398803 32890276 33907330 32621077 34176528
## Mar 2021 33437801 32910129 33965472 32630797 34244804
## Apr 2021 33461260 32914402 34008117 32624913 34297606
## May 2021 33499523 32933763 34065283 32634267 34364779
## Jun 2021 33596477 33010179 34182775 32699812 34493142
## Jul 2021 33711166 33104196 34318136 32782885 34639447
## Aug 2021 33742789 33115022 34370555 32782703 34702875
## Sep 2021 33746812 33098558 34395067 32755392 34738233
## Oct 2021 33775487 33106671 34444303 32752621 34798352
## Nov 2021 33814150 33125051 34503250 32760263 34868037
## Dec 2021 33796183 33086768 34505598 32711226 34881140
## Jan 2022 33754096 33024619 34483574 32638457 34869736
## Feb 2022 33756032 33006493 34505571 32609711 34902353
## Mar 2022 33794318 33024951 34563685 32617672 34970964
## Apr 2022 33817439 33028271 34606607 32610511 35024368
## May 2022 33855005 33046253 34663756 32618126 35091884
## Jun 2022 33951166 33121642 34780691 32682518 35219814
## Jul 2022 34065363 33214953 34915772 32764774 35365951
## Aug 2022 34096355 33225001 34967709 32763734 35428976
## Sep 2022 34099963 33207893 34992032 32735660 35464265
## Oct 2022 34128128 33215321 35040935 32732110 35524146
## Nov 2022 34166413 33233082 35099743 32739007 35593818
## Dec 2022 34148031 33194185 35101877 32689250 35606812
## Jan 2023 34105520 33131361 35079679 32615672 35595368
## Feb 2023 34106975 33112534 35101415 32586109 35627840
## Mar 2023 34144916 33130386 35159447 32593325 35696507
## Apr 2023 34167588 33133018 35202157 32585350 35749825
## May 2023 34204834 33150409 35259259 32592230 35817438
autoplot(pred22)
# Fit an ARIMA model without a seasonal term
fit3 <- auto.arima(df22, seasonal = FALSE)
fit3
## Series: df22
## ARIMA(5,1,1) with drift
##
## Coefficients:
## ar1 ar2 ar3 ar4 ar5 ma1 drift
## 0.7583 -0.2536 -0.0473 0.2995 -0.4508 -0.2670 22452.337
## s.e. 0.0835 0.0718 0.0652 0.0631 0.0500 0.0853 2975.167
##
## sigma^2 estimated as 2.608e+09: log likelihood=-3981.5
## AIC=7979 AICc=7979.46 BIC=8009.27
## ARIMA(5,1,1)
# First difference, log, and differenced-log transforms of the UK series
df22.diff <- diff(df22)
plot(df22.diff, main = " diff data")
df22.log <- log(df22)
plot(df22.log, main = " log data")
df22.difflog <- diff(log(df22))
plot(df22.difflog, main = " difflog data")
acf2(df22.difflog)
## ACF PACF
## [1,] 0.47 0.47
## [2,] 0.11 -0.14
## [3,] 0.00 0.02
## [4,] 0.04 0.06
## [5,] -0.29 -0.44
## [6,] -0.47 -0.21
## [7,] -0.27 0.09
## [8,] 0.02 0.11
## [9,] -0.01 -0.07
## [10,] 0.09 0.20
## [11,] 0.32 0.15
## [12,] 0.50 0.17
## [13,] 0.29 0.04
## [14,] 0.01 -0.15
## [15,] -0.03 0.02
## [16,] 0.00 0.10
## [17,] -0.25 -0.15
## [18,] -0.44 -0.11
## [19,] -0.30 -0.02
## [20,] 0.00 0.04
## [21,] -0.05 -0.15
## [22,] 0.01 0.11
## [23,] 0.31 0.18
## [24,] 0.51 0.07
## [25,] 0.30 0.03
## [26,] 0.05 0.00
## [27,] 0.08 0.10
## [28,] 0.02 -0.07
## [29,] -0.27 -0.05
## [30,] -0.46 -0.09
## [31,] -0.28 0.02
## [32,] -0.06 -0.01
## [33,] -0.06 -0.06
## [34,] 0.00 0.03
## [35,] 0.29 0.06
## [36,] 0.45 0.01
## [37,] 0.28 0.02
## [38,] 0.05 0.01
## [39,] 0.06 0.02
## [40,] 0.01 -0.03
## [41,] -0.29 -0.08
## [42,] -0.47 -0.04
## [43,] -0.29 -0.02
## [44,] -0.06 0.03
## [45,] -0.05 0.00
## [46,] -0.02 -0.01
## [47,] 0.27 0.01
## [48,] 0.42 0.01
## From the acf and pacf we found that there are some patterns. Therefore, we should fit a garch model. AR(1)
acf2(df22^2)## ARCH(1)
## ACF PACF
## [1,] 0.99 0.99
## [2,] 0.98 -0.02
## [3,] 0.97 -0.01
## [4,] 0.96 0.01
## [5,] 0.95 -0.01
## [6,] 0.94 -0.02
## [7,] 0.92 0.01
## [8,] 0.91 0.00
## [9,] 0.90 -0.01
## [10,] 0.89 -0.02
## [11,] 0.88 -0.02
## [12,] 0.87 -0.01
## [13,] 0.86 -0.01
## [14,] 0.85 -0.02
## [15,] 0.84 0.00
## [16,] 0.82 0.00
## [17,] 0.81 0.01
## [18,] 0.80 -0.02
## [19,] 0.79 0.01
## [20,] 0.78 -0.01
## [21,] 0.77 0.00
## [22,] 0.76 -0.02
## [23,] 0.74 -0.02
## [24,] 0.73 -0.01
## [25,] 0.72 -0.01
## [26,] 0.71 -0.02
## [27,] 0.70 0.01
## [28,] 0.69 0.00
## [29,] 0.68 0.00
## [30,] 0.66 -0.01
## [31,] 0.65 0.00
## [32,] 0.64 0.01
## [33,] 0.63 -0.02
## [34,] 0.62 -0.02
## [35,] 0.61 -0.01
## [36,] 0.60 0.00
## [37,] 0.59 -0.01
## [38,] 0.57 -0.01
## [39,] 0.56 0.01
## [40,] 0.55 0.00
## [41,] 0.54 -0.01
## [42,] 0.53 -0.01
## [43,] 0.52 0.00
## [44,] 0.51 0.01
## [45,] 0.50 -0.01
## [46,] 0.49 -0.01
## [47,] 0.48 0.00
## [48,] 0.47 0.00
# Fit arima(5,1,1). NOTE(review): this reassigns `arima511`, which earlier held the
# forecast::Arima fit with drift; from here on it is the stats::arima fit without drift.
arima511 <- arima(df22, order = c(5, 1, 1))
residual_arima511 <- residuals(arima511) # accessor instead of $residuals
acf2(residual_arima511^2)
## ACF PACF
## [1,] -0.01 -0.01
## [2,] 0.20 0.20
## [3,] 0.07 0.07
## [4,] 0.07 0.03
## [5,] 0.03 0.01
## [6,] 0.02 0.00
## [7,] -0.02 -0.03
## [8,] 0.02 0.01
## [9,] -0.07 -0.07
## [10,] 0.13 0.12
## [11,] 0.04 0.07
## [12,] 0.09 0.06
## [13,] 0.07 0.04
## [14,] 0.06 0.02
## [15,] 0.06 0.02
## [16,] -0.07 -0.11
## [17,] 0.15 0.14
## [18,] -0.02 0.00
## [19,] -0.02 -0.05
## [20,] 0.01 0.00
## [21,] 0.03 0.05
## [22,] 0.04 0.04
## [23,] 0.01 -0.02
## [24,] 0.00 -0.03
## [25,] 0.09 0.06
## [26,] -0.04 -0.01
## [27,] 0.01 -0.06
## [28,] -0.08 -0.10
## [29,] -0.02 -0.02
## [30,] 0.02 0.07
## [31,] 0.03 0.06
## [32,] -0.05 -0.08
## [33,] 0.03 0.03
## [34,] -0.07 -0.08
## [35,] 0.14 0.11
## [36,] -0.04 -0.01
## [37,] 0.06 0.02
## [38,] 0.01 0.02
## [39,] 0.03 0.01
## [40,] 0.02 0.03
## [41,] -0.01 -0.01
## [42,] 0.07 0.06
## [43,] 0.17 0.19
## [44,] 0.00 0.02
## [45,] 0.23 0.19
## [46,] 0.05 0.05
## [47,] 0.15 0.05
## [48,] 0.12 0.07
# Fit an ARMA(5,1)+GARCH(1,1) model to the differenced-log UK series, then summarize
uk_garch_fit <- garchFit(~ arma(5, 1) + garch(1, 1), data = df22.difflog)
summary(uk_garch_fit)
##
## Series Initialization:
## ARMA Model: arma
## Formula Mean: ~ arma(5, 1)
## GARCH Model: garch
## Formula Variance: ~ garch(1, 1)
## ARMA Order: 5 1
## Max ARMA Order: 5
## GARCH Order: 1 1
## Max GARCH Order: 1
## Maximum Order: 5
## Conditional Dist: norm
## h.start: 6
## llh.start: 1
## Length of Series: 325
## Recursion Init: mci
## Series Scale: 0.002262591
##
## Parameter Initialization:
## Initial Parameters: $params
## Limits of Transformations: $U, $V
## Which Parameters are Fixed? $includes
## Parameter Matrix:
## U V params includes
## mu -3.35797923 3.357979 0.34224892 TRUE
## ar1 -0.99999999 1.000000 0.76916698 TRUE
## ar2 -0.99999999 1.000000 -0.26344089 TRUE
## ar3 -0.99999999 1.000000 -0.04869207 TRUE
## ar4 -0.99999999 1.000000 0.31133629 TRUE
## ar5 -0.99999999 1.000000 -0.47352930 TRUE
## ma1 -0.99999999 1.000000 -0.26786887 TRUE
## omega 0.00000100 100.000000 0.10000000 TRUE
## alpha1 0.00000001 1.000000 0.10000000 TRUE
## gamma1 -0.99999999 1.000000 0.10000000 FALSE
## beta1 0.00000001 1.000000 0.80000000 TRUE
## delta 0.00000000 2.000000 2.00000000 FALSE
## skew 0.10000000 10.000000 1.00000000 FALSE
## shape 1.00000000 10.000000 4.00000000 FALSE
## Index List of Parameters to be Optimized:
## mu ar1 ar2 ar3 ar4 ar5 ma1 omega alpha1 beta1
## 1 2 3 4 5 6 7 8 9 11
## Persistence: 0.9
##
##
## --- START OF TRACE ---
## Selected Algorithm: nlminb
##
## R coded nlminb Solver:
##
## 0: 377.48613: 0.342249 0.769167 -0.263441 -0.0486921 0.311336 -0.473529 -0.267869 0.100000 0.100000 0.800000
## 1: 373.43253: 0.341966 0.767989 -0.264698 -0.0500675 0.309883 -0.475060 -0.268039 0.0914690 0.0955537 0.793343
## 2: 369.77578: 0.341175 0.764696 -0.268163 -0.0538182 0.305889 -0.479355 -0.268624 0.0744947 0.0866795 0.781362
## 3: 368.73389: 0.339117 0.756345 -0.276560 -0.0626328 0.296259 -0.490454 -0.270910 0.0834987 0.0900083 0.785699
## 4: 368.03044: 0.338569 0.754972 -0.277630 -0.0636957 0.294948 -0.492639 -0.271650 0.0745500 0.0858335 0.780094
## 5: 367.70195: 0.336522 0.751671 -0.278907 -0.0644204 0.293149 -0.499261 -0.275301 0.0785891 0.0894200 0.781818
## 6: 367.28922: 0.331155 0.748399 -0.277145 -0.0609421 0.295413 -0.510728 -0.284619 0.0715862 0.0915855 0.778275
## 7: 366.94931: 0.324740 0.751911 -0.275357 -0.0592148 0.302971 -0.513841 -0.293392 0.0770102 0.0950558 0.776720
## 8: 366.50452: 0.312026 0.771518 -0.280474 -0.0740057 0.309454 -0.507479 -0.304730 0.0780596 0.0932281 0.770608
## 9: 366.20279: 0.299707 0.780180 -0.274370 -0.0580949 0.310622 -0.514114 -0.328066 0.0815709 0.0917378 0.770731
## 10: 366.14197: 0.299443 0.779615 -0.275475 -0.0591049 0.310093 -0.514353 -0.328252 0.0793892 0.0910641 0.769552
## 11: 366.10758: 0.299022 0.778908 -0.277049 -0.0604938 0.309471 -0.514591 -0.328577 0.0805255 0.0922177 0.770262
## 12: 366.09882: 0.298644 0.778663 -0.278069 -0.0613373 0.309291 -0.514536 -0.328812 0.0782208 0.0917513 0.769054
## 13: 366.06346: 0.298101 0.778455 -0.279054 -0.0609810 0.309583 -0.513873 -0.329487 0.0791906 0.0936049 0.770073
## 14: 366.03901: 0.296698 0.778852 -0.281361 -0.0598142 0.309983 -0.512431 -0.330812 0.0764715 0.0958237 0.770026
## 15: 365.99987: 0.294628 0.786842 -0.285624 -0.0568315 0.306066 -0.512029 -0.330173 0.0756710 0.0976599 0.773711
## 16: 365.94812: 0.292484 0.789228 -0.291192 -0.0537997 0.306113 -0.508544 -0.335023 0.0764872 0.0915949 0.775345
## 17: 365.94001: 0.291949 0.789797 -0.291251 -0.0545509 0.305967 -0.508933 -0.335522 0.0770443 0.0925668 0.775575
## 18: 365.92707: 0.291604 0.790507 -0.290584 -0.0553598 0.305807 -0.508879 -0.336400 0.0759213 0.0927467 0.774935
## 19: 365.91671: 0.291154 0.791021 -0.289983 -0.0559237 0.306175 -0.508565 -0.337516 0.0763639 0.0936045 0.775066
## 20: 365.89750: 0.290100 0.792285 -0.289076 -0.0567057 0.307640 -0.508215 -0.339503 0.0754878 0.0941219 0.774363
## 21: 365.88368: 0.288182 0.798276 -0.289805 -0.0579616 0.306424 -0.511027 -0.339172 0.0765511 0.0930184 0.774667
## 22: 365.84244: 0.286201 0.796859 -0.292015 -0.0559792 0.309535 -0.507427 -0.342878 0.0758738 0.0927686 0.774425
## 23: 365.83730: 0.286107 0.796930 -0.292060 -0.0561294 0.309444 -0.507540 -0.342985 0.0765135 0.0931136 0.774745
## 24: 365.83418: 0.285903 0.797120 -0.292115 -0.0564029 0.309308 -0.507728 -0.343204 0.0761681 0.0930633 0.774487
## 25: 365.82944: 0.285523 0.797509 -0.292042 -0.0562667 0.308742 -0.507346 -0.344080 0.0766514 0.0932967 0.774801
## 26: 365.81938: 0.284696 0.798532 -0.292251 -0.0552691 0.307994 -0.506398 -0.345578 0.0763433 0.0927186 0.774557
## 27: 365.80708: 0.283245 0.800972 -0.294728 -0.0540023 0.311513 -0.508379 -0.345610 0.0768962 0.0932732 0.774440
## 28: 365.78108: 0.281193 0.803328 -0.292120 -0.0569490 0.307733 -0.503450 -0.355235 0.0752569 0.0945944 0.776247
## 29: 365.77861: 0.281116 0.803507 -0.292154 -0.0570100 0.307721 -0.503613 -0.355185 0.0748725 0.0943438 0.775964
## 30: 365.77668: 0.280996 0.803777 -0.292210 -0.0570921 0.307728 -0.503846 -0.355123 0.0752300 0.0943957 0.776051
## 31: 365.77410: 0.280676 0.804006 -0.292074 -0.0571378 0.307569 -0.503683 -0.354749 0.0750979 0.0938830 0.775627
## 32: 365.77009: 0.280057 0.804055 -0.291686 -0.0570893 0.307082 -0.502760 -0.353959 0.0760862 0.0934396 0.775341
## 33: 365.75550: 0.279191 0.809881 -0.298032 -0.0564349 0.310382 -0.505257 -0.361202 0.0760220 0.0957415 0.773421
## 34: 365.74772: 0.275147 0.811142 -0.297880 -0.0549356 0.310227 -0.502986 -0.359403 0.0752504 0.0947386 0.772444
## 35: 365.72846: 0.272515 0.814620 -0.296982 -0.0565074 0.312663 -0.502523 -0.365053 0.0768724 0.0897596 0.776635
## 36: 365.72107: 0.272146 0.816441 -0.296757 -0.0574921 0.310950 -0.503976 -0.363591 0.0757229 0.0920860 0.776352
## 37: 365.72020: 0.272131 0.816451 -0.296744 -0.0573734 0.311130 -0.503803 -0.363590 0.0759592 0.0922544 0.776461
## 38: 365.71972: 0.272105 0.816452 -0.296752 -0.0572597 0.311316 -0.503620 -0.363586 0.0757171 0.0922106 0.776296
## 39: 365.71897: 0.272030 0.816441 -0.296892 -0.0569999 0.311178 -0.503415 -0.363547 0.0761254 0.0925374 0.775821
## 40: 365.71819: 0.271866 0.816420 -0.297197 -0.0564421 0.310773 -0.503046 -0.363452 0.0764201 0.0929133 0.774551
## 41: 365.71715: 0.271403 0.817112 -0.298075 -0.0559646 0.309982 -0.502116 -0.363317 0.0775007 0.0937605 0.772452
## 42: 365.71601: 0.270668 0.817371 -0.297798 -0.0563674 0.310964 -0.502108 -0.363515 0.0765175 0.0932707 0.774421
## 43: 365.71601: 0.270282 0.818869 -0.298435 -0.0550688 0.310548 -0.501631 -0.365253 0.0770952 0.0927087 0.773217
## 44: 365.71580: 0.269914 0.818127 -0.298041 -0.0547517 0.310487 -0.502029 -0.365321 0.0777691 0.0929483 0.772890
## 45: 365.71502: 0.270024 0.818317 -0.297846 -0.0559215 0.311166 -0.501960 -0.365159 0.0779606 0.0929667 0.772045
## 46: 365.71495: 0.269910 0.818774 -0.298692 -0.0553834 0.310578 -0.501664 -0.365278 0.0773870 0.0928270 0.772861
## 47: 365.71483: 0.269926 0.819242 -0.298979 -0.0555634 0.310613 -0.501483 -0.365396 0.0774700 0.0929646 0.773050
## 48: 365.71468: 0.269850 0.819032 -0.298701 -0.0556750 0.310811 -0.501648 -0.365339 0.0775362 0.0931294 0.772651
## 49: 365.71466: 0.269456 0.818731 -0.298424 -0.0553742 0.310926 -0.501804 -0.365383 0.0777759 0.0930693 0.772449
## 50: 365.71450: 0.269284 0.819054 -0.298573 -0.0553978 0.310997 -0.501701 -0.365556 0.0777113 0.0930174 0.772387
## 51: 365.71444: 0.269199 0.819276 -0.298538 -0.0556780 0.311212 -0.501719 -0.365640 0.0778228 0.0932844 0.772048
## 52: 365.71441: 0.269084 0.819555 -0.298811 -0.0554462 0.311086 -0.501582 -0.365770 0.0777411 0.0931977 0.772282
## 53: 365.71439: 0.269041 0.819575 -0.298898 -0.0555197 0.311127 -0.501534 -0.365705 0.0777617 0.0931477 0.772234
## 54: 365.71439: 0.269054 0.819686 -0.298983 -0.0554234 0.311061 -0.501499 -0.365730 0.0777118 0.0932094 0.772243
## 55: 365.71438: 0.269030 0.819808 -0.299099 -0.0553812 0.311026 -0.501451 -0.365790 0.0777342 0.0932086 0.772236
## 56: 365.71438: 0.269022 0.819824 -0.299133 -0.0553754 0.311030 -0.501456 -0.365809 0.0777028 0.0931964 0.772300
## 57: 365.71438: 0.269020 0.819832 -0.299130 -0.0553763 0.311028 -0.501452 -0.365821 0.0777140 0.0931971 0.772279
## 58: 365.71438: 0.269022 0.819824 -0.299126 -0.0553776 0.311030 -0.501453 -0.365810 0.0777117 0.0931978 0.772283
##
## Final Estimate of the Negative LLH:
## LLH: -1613.94 norm LLH: -4.96597
## mu ar1 ar2 ar3 ar4
## 6.086864e-04 8.198240e-01 -2.991259e-01 -5.537759e-02 3.110303e-01
## ar5 ma1 omega alpha1 beta1
## -5.014525e-01 -3.658103e-01 3.978309e-07 9.319782e-02 7.722835e-01
##
## R-optimhess Difference Approximated Hessian Matrix:
## mu ar1 ar2 ar3
## mu -2.925216e+08 -2.847106e+05 -2.784705e+05 -266841.99714
## ar1 -2.847106e+05 -1.100574e+03 -8.070901e+02 -434.27976
## ar2 -2.784705e+05 -8.070901e+02 -1.073956e+03 -778.51528
## ar3 -2.668420e+05 -4.342798e+02 -7.785153e+02 -1097.67814
## ar4 -2.703049e+05 -2.169215e+02 -4.214022e+02 -822.30807
## ar5 -2.634540e+05 -7.711196e+01 -2.095625e+02 -444.56051
## ma1 -3.315680e+04 -3.778250e+02 -1.345091e+02 -31.59150
## omega -1.369639e+10 1.673603e+07 1.542630e+07 9350385.84576
## alpha1 2.657556e+04 -2.208810e+01 -4.232767e+01 -34.97202
## beta1 -3.069864e+04 3.809996e+01 3.061092e+01 17.60326
## ar4 ar5 ma1 omega
## mu -2.703049e+05 -2.634540e+05 -3.315680e+04 -1.369639e+10
## ar1 -2.169215e+02 -7.711196e+01 -3.778250e+02 1.673603e+07
## ar2 -4.214022e+02 -2.095625e+02 -1.345091e+02 1.542630e+07
## ar3 -8.223081e+02 -4.445605e+02 -3.159150e+01 9.350386e+06
## ar4 -1.152946e+03 -8.433250e+02 -7.655507e+00 1.193937e+07
## ar5 -8.433250e+02 -1.141962e+03 -1.076582e+01 2.370733e+07
## ma1 -7.655507e+00 -1.076582e+01 -3.519323e+02 2.753895e+07
## omega 1.193937e+07 2.370733e+07 2.753895e+07 -4.194480e+14
## alpha1 -3.997496e+01 -7.811831e+01 -5.989128e+01 -9.153461e+08
## beta1 2.849936e+01 2.834734e+01 4.388268e+01 -1.159172e+09
## alpha1 beta1
## mu 2.657556e+04 -3.069864e+04
## ar1 -2.208810e+01 3.809996e+01
## ar2 -4.232767e+01 3.061092e+01
## ar3 -3.497202e+01 1.760326e+01
## ar4 -3.997496e+01 2.849936e+01
## ar5 -7.811831e+01 2.834734e+01
## ma1 -5.989128e+01 4.388268e+01
## omega -9.153461e+08 -1.159172e+09
## alpha1 -2.933753e+03 -2.767539e+03
## beta1 -2.767539e+03 -3.334359e+03
## attr(,"time")
## Time difference of 0.02720594 secs
##
## --- END OF TRACE ---
##
##
## Time to Estimate Parameters:
## Time difference of 0.121706 secs
##
## Title:
## GARCH Modelling
##
## Call:
## garchFit(formula = ~arma(5, 1) + garch(1, 1), data = df22.difflog)
##
## Mean and Variance Equation:
## data ~ arma(5, 1) + garch(1, 1)
## <environment: 0x7fafd724b790>
## [data = df22.difflog]
##
## Conditional Distribution:
## norm
##
## Coefficient(s):
## mu ar1 ar2 ar3 ar4
## 6.0869e-04 8.1982e-01 -2.9913e-01 -5.5378e-02 3.1103e-01
## ar5 ma1 omega alpha1 beta1
## -5.0145e-01 -3.6581e-01 3.9783e-07 9.3198e-02 7.7228e-01
##
## Std. Errors:
## based on Hessian
##
## Error Analysis:
## Estimate Std. Error t value Pr(>|t|)
## mu 6.087e-04 8.471e-05 7.186 6.69e-13 ***
## ar1 8.198e-01 7.165e-02 11.442 < 2e-16 ***
## ar2 -2.991e-01 7.201e-02 -4.154 3.27e-05 ***
## ar3 -5.538e-02 6.812e-02 -0.813 0.4163
## ar4 3.110e-01 6.510e-02 4.777 1.78e-06 ***
## ar5 -5.015e-01 5.000e-02 -10.029 < 2e-16 ***
## ma1 -3.658e-01 8.055e-02 -4.541 5.59e-06 ***
## omega 3.978e-07 2.806e-07 1.418 0.1563
## alpha1 9.320e-02 4.655e-02 2.002 0.0453 *
## beta1 7.723e-01 1.202e-01 6.424 1.33e-10 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Log Likelihood:
## 1613.94 normalized: 4.96597
##
## Description:
## Thu Apr 30 16:55:59 2020 by user:
##
##
## Standardised Residuals Tests:
## Statistic p-Value
## Jarque-Bera Test R Chi^2 0.4475601 0.799491
## Shapiro-Wilk Test R W 0.9965797 0.7184867
## Ljung-Box Test R Q(10) 35.19155 0.0001158062
## Ljung-Box Test R Q(15) 49.83029 1.28346e-05
## Ljung-Box Test R Q(20) 61.18389 4.663229e-06
## Ljung-Box Test R^2 Q(10) 3.870005 0.9530208
## Ljung-Box Test R^2 Q(15) 4.625821 0.9948484
## Ljung-Box Test R^2 Q(20) 16.90143 0.6593643
## LM Arch Test R TR^2 4.752299 0.9657447
##
## Information Criterion Statistics:
## AIC BIC SIC HQIC
## -9.870401 -9.753975 -9.872220 -9.823935
summary(garchFit(~arma(5,1)+garch(1,0), df22.difflog))
##
## Series Initialization:
## ARMA Model: arma
## Formula Mean: ~ arma(5, 1)
## GARCH Model: garch
## Formula Variance: ~ garch(1, 0)
## ARMA Order: 5 1
## Max ARMA Order: 5
## GARCH Order: 1 0
## Max GARCH Order: 1
## Maximum Order: 5
## Conditional Dist: norm
## h.start: 6
## llh.start: 1
## Length of Series: 325
## Recursion Init: mci
## Series Scale: 0.002262591
##
## Parameter Initialization:
## Initial Parameters: $params
## Limits of Transformations: $U, $V
## Which Parameters are Fixed? $includes
## Parameter Matrix:
## U V params includes
## mu -3.35797923 3.357979 0.34224892 TRUE
## ar1 -0.99999999 1.000000 0.76916698 TRUE
## ar2 -0.99999999 1.000000 -0.26344089 TRUE
## ar3 -0.99999999 1.000000 -0.04869207 TRUE
## ar4 -0.99999999 1.000000 0.31133629 TRUE
## ar5 -0.99999999 1.000000 -0.47352930 TRUE
## ma1 -0.99999999 1.000000 -0.26786887 TRUE
## omega 0.00000100 100.000000 0.10000000 TRUE
## alpha1 0.00000001 1.000000 0.10000000 TRUE
## gamma1 -0.99999999 1.000000 0.10000000 FALSE
## delta 0.00000000 2.000000 2.00000000 FALSE
## skew 0.10000000 10.000000 1.00000000 FALSE
## shape 1.00000000 10.000000 4.00000000 FALSE
## Index List of Parameters to be Optimized:
## mu ar1 ar2 ar3 ar4 ar5 ma1 omega alpha1
## 1 2 3 4 5 6 7 8 9
## Persistence: 0.1
##
##
## --- START OF TRACE ---
## Selected Algorithm: nlminb
##
## R coded nlminb Solver:
##
## 0: 656.92593: 0.342249 0.769167 -0.263441 -0.0486921 0.311336 -0.473529 -0.267869 0.100000 0.100000
## 1: 411.39194: 0.334326 0.719643 -0.309220 -0.0938436 0.278791 -0.505450 -0.280305 1.06289 0.351924
## 2: 407.29115: 0.334049 0.745665 -0.273313 -0.0606229 0.311782 -0.482879 -0.268738 1.04190 0.332778
## 3: 387.88711: 0.320198 0.724967 -0.296579 -0.101467 0.276034 -0.519209 -0.266348 0.780927 0.211536
## 4: 381.50468: 0.321064 0.745784 -0.265518 -0.0681479 0.311563 -0.490873 -0.258885 0.741282 0.192545
## 5: 372.94902: 0.312822 0.731338 -0.276035 -0.0833404 0.304037 -0.496966 -0.265312 0.598536 0.124656
## 6: 371.73803: 0.311627 0.746773 -0.255515 -0.0654637 0.321182 -0.484252 -0.261429 0.572152 0.114446
## 7: 371.00952: 0.302306 0.744145 -0.260390 -0.0829683 0.302904 -0.499720 -0.262409 0.548801 0.108482
## 8: 370.38398: 0.292004 0.740892 -0.256099 -0.0746217 0.324622 -0.477351 -0.271440 0.535843 0.109528
## 9: 370.13521: 0.281298 0.764679 -0.244808 -0.0797040 0.313179 -0.491924 -0.273997 0.522562 0.110399
## 10: 369.92133: 0.279562 0.752659 -0.255774 -0.0809292 0.321060 -0.481661 -0.281635 0.518408 0.106459
## 11: 369.85168: 0.274403 0.768501 -0.252858 -0.0771533 0.322529 -0.482120 -0.287386 0.516276 0.105139
## 12: 369.80973: 0.268216 0.772768 -0.260184 -0.0817655 0.317428 -0.485790 -0.296173 0.515065 0.104321
## 13: 369.70064: 0.262850 0.780875 -0.263244 -0.0733996 0.319989 -0.477153 -0.303659 0.517708 0.101692
## 14: 369.69037: 0.257572 0.793623 -0.269156 -0.0760200 0.318429 -0.475426 -0.309900 0.520281 0.0946845
## 15: 369.67884: 0.256094 0.791242 -0.268669 -0.0703742 0.320819 -0.475390 -0.315068 0.520360 0.0942031
## 16: 369.67677: 0.255885 0.790859 -0.269799 -0.0721397 0.318831 -0.476551 -0.314756 0.520239 0.0944700
## 17: 369.67263: 0.255676 0.790991 -0.268856 -0.0712070 0.318719 -0.474875 -0.313416 0.519460 0.0962086
## 18: 369.67245: 0.255341 0.791038 -0.268985 -0.0710281 0.318179 -0.474949 -0.313678 0.519150 0.0966565
## 19: 369.67235: 0.255150 0.791586 -0.269267 -0.0707610 0.318561 -0.474723 -0.314371 0.518934 0.0971124
## 20: 369.67226: 0.255131 0.791600 -0.269452 -0.0710565 0.318386 -0.474717 -0.314288 0.518879 0.0970360
## 21: 369.67220: 0.255090 0.791755 -0.269297 -0.0709599 0.318432 -0.474568 -0.314210 0.518943 0.0967813
## 22: 369.67217: 0.255011 0.791762 -0.269364 -0.0709838 0.318336 -0.474655 -0.314217 0.519050 0.0964945
## 23: 369.67216: 0.254962 0.791725 -0.269409 -0.0709866 0.318389 -0.474358 -0.314086 0.519167 0.0963370
## 24: 369.67212: 0.254893 0.791945 -0.269350 -0.0710019 0.318322 -0.474511 -0.314235 0.519208 0.0961793
## 25: 369.67210: 0.254838 0.792110 -0.269609 -0.0709493 0.318464 -0.474463 -0.314386 0.519171 0.0962208
## 26: 369.67210: 0.254808 0.792182 -0.269564 -0.0710164 0.318300 -0.474453 -0.314457 0.519162 0.0962504
## 27: 369.67209: 0.254771 0.792225 -0.269469 -0.0709343 0.318308 -0.474341 -0.314527 0.519196 0.0961856
## 28: 369.67208: 0.254717 0.792171 -0.269519 -0.0708951 0.318278 -0.474402 -0.314411 0.519225 0.0961578
## 29: 369.67208: 0.254705 0.792197 -0.269556 -0.0709168 0.318312 -0.474341 -0.314438 0.519203 0.0961521
## 30: 369.67208: 0.254692 0.792244 -0.269568 -0.0709421 0.318270 -0.474364 -0.314494 0.519202 0.0961522
## 31: 369.67208: 0.254678 0.792307 -0.269581 -0.0708962 0.318305 -0.474364 -0.314509 0.519205 0.0961360
## 32: 369.67208: 0.254665 0.792314 -0.269601 -0.0708950 0.318271 -0.474332 -0.314584 0.519213 0.0961249
## 33: 369.67207: 0.254645 0.792378 -0.269619 -0.0708968 0.318296 -0.474329 -0.314568 0.519209 0.0961594
## 34: 369.67207: 0.254638 0.792362 -0.269651 -0.0709136 0.318288 -0.474346 -0.314584 0.519204 0.0961615
## 35: 369.67207: 0.254630 0.792384 -0.269646 -0.0708853 0.318291 -0.474333 -0.314606 0.519208 0.0961474
## 36: 369.67207: 0.254618 0.792393 -0.269669 -0.0708953 0.318292 -0.474324 -0.314627 0.519200 0.0961587
## 37: 369.67207: 0.254596 0.792456 -0.269703 -0.0708638 0.318297 -0.474347 -0.314639 0.519199 0.0961524
## 38: 369.67207: 0.254595 0.792449 -0.269699 -0.0708570 0.318313 -0.474315 -0.314657 0.519200 0.0961550
## 39: 369.67207: 0.254586 0.792449 -0.269689 -0.0708720 0.318296 -0.474320 -0.314669 0.519206 0.0961436
## 40: 369.67207: 0.254588 0.792468 -0.269698 -0.0708561 0.318279 -0.474306 -0.314683 0.519197 0.0961587
## 41: 369.67207: 0.254584 0.792469 -0.269688 -0.0708824 0.318298 -0.474316 -0.314695 0.519193 0.0961718
## 42: 369.67207: 0.254579 0.792475 -0.269711 -0.0708638 0.318288 -0.474304 -0.314707 0.519198 0.0961588
## 43: 369.67207: 0.254575 0.792482 -0.269701 -0.0708615 0.318290 -0.474303 -0.314706 0.519201 0.0961557
## 44: 369.67207: 0.254577 0.792485 -0.269709 -0.0708620 0.318279 -0.474308 -0.314703 0.519197 0.0961655
## 45: 369.67207: 0.254575 0.792487 -0.269709 -0.0708613 0.318284 -0.474305 -0.314707 0.519197 0.0961645
## 46: 369.67207: 0.254573 0.792490 -0.269710 -0.0708615 0.318287 -0.474303 -0.314711 0.519196 0.0961625
## 47: 369.67207: 0.254568 0.792492 -0.269710 -0.0708611 0.318286 -0.474303 -0.314719 0.519201 0.0961555
## 48: 369.67207: 0.254568 0.792504 -0.269717 -0.0708591 0.318288 -0.474304 -0.314725 0.519198 0.0961646
## 49: 369.67207: 0.254567 0.792503 -0.269718 -0.0708602 0.318286 -0.474303 -0.314723 0.519197 0.0961656
## 50: 369.67207: 0.254566 0.792503 -0.269716 -0.0708592 0.318285 -0.474301 -0.314721 0.519197 0.0961642
## 51: 369.67207: 0.254566 0.792504 -0.269717 -0.0708597 0.318285 -0.474301 -0.314722 0.519197 0.0961628
## 52: 369.67207: 0.254566 0.792503 -0.269717 -0.0708592 0.318285 -0.474301 -0.314723 0.519197 0.0961629
## 53: 369.67207: 0.254566 0.792504 -0.269718 -0.0708589 0.318285 -0.474302 -0.314723 0.519197 0.0961634
## 54: 369.67207: 0.254566 0.792504 -0.269718 -0.0708588 0.318286 -0.474302 -0.314724 0.519196 0.0961638
## 55: 369.67207: 0.254566 0.792505 -0.269719 -0.0708583 0.318285 -0.474301 -0.314724 0.519196 0.0961637
## 56: 369.67207: 0.254566 0.792505 -0.269719 -0.0708583 0.318285 -0.474301 -0.314724 0.519196 0.0961637
##
## Final Estimate of the Negative LLH:
## LLH: -1609.982 norm LLH: -4.953792
## mu ar1 ar2 ar3 ar4
## 5.759787e-04 7.925046e-01 -2.697186e-01 -7.085835e-02 3.182852e-01
## ar5 ma1 omega alpha1
## -4.743015e-01 -3.147241e-01 2.657932e-06 9.616373e-02
##
## R-optimhess Difference Approximated Hessian Matrix:
## mu ar1 ar2 ar3
## mu -2.403192e+08 -206364.59196 -197930.30572 -191267.10010
## ar1 -2.063646e+05 -922.17191 -674.88218 -352.94345
## ar2 -1.979303e+05 -674.88218 -960.33071 -684.03892
## ar3 -1.912671e+05 -352.94345 -684.03892 -976.46514
## ar4 -1.979589e+05 -204.19124 -393.96725 -721.17024
## ar5 -2.006188e+05 -114.14893 -228.55689 -400.63696
## ma1 -1.012914e+04 -360.44388 -130.41195 -35.28902
## omega -1.640944e+09 2041476.43332 2577941.39342 1711182.40162
## alpha1 4.053743e+04 -53.07445 -73.52417 -49.37011
## ar4 ar5 ma1 omega
## mu -197958.91678 -200618.80399 -10129.14320 -1.640944e+09
## ar1 -204.19124 -114.14893 -360.44388 2.041476e+06
## ar2 -393.96725 -228.55689 -130.41195 2.577941e+06
## ar3 -721.17024 -400.63696 -35.28902 1.711182e+06
## ar4 -1021.27786 -732.96580 -26.01856 -9.288302e+05
## ar5 -732.96580 -1017.74709 -25.79690 -8.438515e+05
## ma1 -26.01856 -25.79690 -358.45313 1.922555e+06
## omega -928830.23976 -843851.46618 1922555.34473 -1.954559e+13
## alpha1 20.47444 17.59282 -49.32113 -4.316915e+07
## alpha1
## mu 4.053743e+04
## ar1 -5.307445e+01
## ar2 -7.352417e+01
## ar3 -4.937011e+01
## ar4 2.047444e+01
## ar5 1.759282e+01
## ma1 -4.932113e+01
## omega -4.316915e+07
## alpha1 -2.545195e+02
## attr(,"time")
## Time difference of 0.02093601 secs
##
## --- END OF TRACE ---
##
##
## Time to Estimate Parameters:
## Time difference of 0.123076 secs
##
## Title:
## GARCH Modelling
##
## Call:
## garchFit(formula = ~arma(5, 1) + garch(1, 0), data = df22.difflog)
##
## Mean and Variance Equation:
## data ~ arma(5, 1) + garch(1, 0)
## <environment: 0x7fafd74af0a8>
## [data = df22.difflog]
##
## Conditional Distribution:
## norm
##
## Coefficient(s):
## mu ar1 ar2 ar3 ar4
## 5.7598e-04 7.9250e-01 -2.6972e-01 -7.0858e-02 3.1829e-01
## ar5 ma1 omega alpha1
## -4.7430e-01 -3.1472e-01 2.6579e-06 9.6164e-02
##
## Std. Errors:
## based on Hessian
##
## Error Analysis:
## Estimate Std. Error t value Pr(>|t|)
## mu 5.760e-04 8.898e-05 6.473 9.61e-11 ***
## ar1 7.925e-01 7.569e-02 10.470 < 2e-16 ***
## ar2 -2.697e-01 7.112e-02 -3.792 0.000149 ***
## ar3 -7.086e-02 6.768e-02 -1.047 0.295141
## ar4 3.183e-01 6.479e-02 4.912 9.00e-07 ***
## ar5 -4.743e-01 4.914e-02 -9.652 < 2e-16 ***
## ma1 -3.147e-01 8.139e-02 -3.867 0.000110 ***
## omega 2.658e-06 3.042e-07 8.738 < 2e-16 ***
## alpha1 9.616e-02 8.947e-02 1.075 0.282457
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Log Likelihood:
## 1609.982 normalized: 4.953792
##
## Description:
## Thu Apr 30 16:55:59 2020 by user:
##
##
## Standardised Residuals Tests:
## Statistic p-Value
## Jarque-Bera Test R Chi^2 0.2734848 0.8721949
## Shapiro-Wilk Test R W 0.9962619 0.6439345
## Ljung-Box Test R Q(10) 30.02556 0.0008484318
## Ljung-Box Test R Q(15) 45.6876 5.960481e-05
## Ljung-Box Test R Q(20) 59.62209 8.147078e-06
## Ljung-Box Test R^2 Q(10) 16.71716 0.08086142
## Ljung-Box Test R^2 Q(15) 19.28223 0.2012282
## Ljung-Box Test R^2 Q(20) 36.25517 0.0143522
## LM Arch Test R TR^2 16.92929 0.1522767
##
## Information Criterion Statistics:
## AIC BIC SIC HQIC
## -9.852199 -9.747417 -9.853679 -9.810380
# Fit GARCH(1,1) to the ARIMA(5,1,1) residuals via tseries::garch.
garch_uk1 <- garch(residual_arima511, order = c(1, 1), trace = FALSE)
## Warning in garch(residual_arima511, order = c(1, 1), trace = F): singular
## information
# The "singular information" warning means the information matrix could not
# be inverted, so the summary below reports NA standard errors and t-values.
# The very large a0 estimate suggests the residuals are on a large scale;
# rescaling before fitting would likely stabilise the optimisation.
summary(garch_uk1)
##
## Call:
## garch(x = residual_arima511, order = c(1, 1), trace = F)
##
## Model:
## GARCH(1,1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.8507 -0.3906 0.2907 0.9232 2.9349
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 2.383e+09 NA NA NA
## a1 2.094e-02 NA NA NA
## b1 1.493e-01 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 0.13246, df = 2, p-value = 0.9359
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 0.34731, df = 1, p-value = 0.5556
AIC(garch_uk1) ## AIC is smaller
## [1] 8003.569
# Fit ARCH(1) (order c(0, 1)) to the same residuals for comparison.
garch_uk2 <- garch(residual_arima511, order = c(0, 1), trace = FALSE)
## Warning in garch(residual_arima511, order = c(0, 1), trace = F): singular
## information
# As with garch_uk1, the singular information matrix leaves the
# standard errors and t-values as NA in the summary.
summary(garch_uk2)
##
## Call:
## garch(x = residual_arima511, order = c(0, 1), trace = F)
##
## Model:
## GARCH(0,1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -3.0053 -0.4058 0.3059 0.9611 3.1050
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 2.515e+09 NA NA NA
## a1 3.916e-02 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 0.089604, df = 2, p-value = 0.9562
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 0.94383, df = 1, p-value = 0.3313
AIC(garch_uk2)
## [1] 8003.624
# Canada: monthly employment (persons), indicator LE_PE_NUM.
df1_ca <- subset(ifs, (Indicator_Code == 'LE_PE_NUM') & (Country_Name == 'Canada'))
# Convert "YYYYMmm" period labels to Dates: "1993M1" -> "1993/1" -> "1993/1/01".
# (paste0 with a literal "/01" replaces the original temporary 'date' column,
# which was created and then immediately dropped.)
df1_ca$Time_Period <- gsub("M", "/", df1_ca$Time_Period)
df1_ca$Time_Period <- paste0(df1_ca$Time_Period, "/01")
df1_ca$Time_Period <- as.Date(df1_ca$Time_Period, "%Y/%m/%d")
# Sort chronologically, reset row names, and drop incomplete rows.
df1_ca <- df1_ca[order(df1_ca$Time_Period), ]
rownames(df1_ca) <- NULL
df1_ca <- na.omit(df1_ca)
# Monthly time series starting January 1993.
df33 <- ts(df1_ca$Value, frequency = 12, start = c(1993, 1))
plot(df33, main = "Canada: Employment, Persons, Number of")
plot(decompose(df33))
## There is an increasing trend with a small drop in 2018
## There is obvious seasonality
# Raw periodogram (no log scale) to find the predominant cycle.
sp <- spectrum(df33, log = 'no')
# Frequency with the largest estimated spectral density. Use the full
# component name 'freq' — the original 'sp$fre' only worked via $ partial
# matching, which is fragile.
sp$freq[which.max(sp$spec)] # 0.0375
## [1] 0.0375
# Period of the predominant frequency: 1/0.0375 = 26.7 months.
1/sp$freq[which.max(sp$spec)] # 26.7 month
## [1] 26.66667
max(sp$spec) #estimate of spectral density at predominant period which is close to 503508181908.
## [1] 503508181908
# 95% confidence interval for the spectral density: 2*f / chi-squared
# quantiles with 2 degrees of freedom.
U <- qchisq(.025, 2)
L <- qchisq(.975, 2)
2*max(sp$spec)/L # lower bound for spectral density
## [1] 136493530941
2*max(sp$spec)/U # upper bound for spectral density
## [1] 1.988751e+13
# The 95% confidence interval is [136493530941, 1.988751e+13]
# First difference to remove the trend, then inspect ACF/PACF of the
# differenced series to choose ARMA orders.
df33.diff <- diff(df33)
plot(df33.diff)
acf2(df33.diff) # suggests p = 1, q = 4,5; spikes at lags 12, 24, ... indicate seasonality
## ACF PACF
## [1,] 0.37 0.37
## [2,] 0.14 0.00
## [3,] -0.13 -0.21
## [4,] -0.32 -0.25
## [5,] -0.36 -0.18
## [6,] -0.27 -0.09
## [7,] -0.37 -0.39
## [8,] -0.34 -0.43
## [9,] -0.13 -0.31
## [10,] 0.14 -0.21
## [11,] 0.36 -0.22
## [12,] 0.92 0.82
## [13,] 0.35 -0.24
## [14,] 0.13 -0.09
## [15,] -0.13 -0.02
## [16,] -0.31 -0.07
## [17,] -0.35 -0.02
## [18,] -0.26 0.01
## [19,] -0.36 -0.01
## [20,] -0.32 0.09
## [21,] -0.12 -0.10
## [22,] 0.14 -0.02
## [23,] 0.34 -0.05
## [24,] 0.88 0.13
## [25,] 0.33 -0.09
## [26,] 0.12 -0.02
## [27,] -0.13 0.01
## [28,] -0.30 -0.04
## [29,] -0.34 -0.06
## [30,] -0.24 0.05
## [31,] -0.35 -0.01
## [32,] -0.31 -0.06
## [33,] -0.12 -0.02
## [34,] 0.14 -0.04
## [35,] 0.34 0.07
## [36,] 0.84 0.02
## [37,] 0.32 -0.02
## [38,] 0.11 -0.03
## [39,] -0.12 0.02
## [40,] -0.28 0.02
## [41,] -0.33 0.00
## [42,] -0.23 -0.01
## [43,] -0.34 0.01
## [44,] -0.31 -0.01
## [45,] -0.11 -0.02
## [46,] 0.14 -0.02
## [47,] 0.33 -0.03
## [48,] 0.80 0.05
# Non-seasonal ARIMA candidates with drift for the Canada series.
# NOTE: 'arima511' here silently overwrites the earlier arima511 object
# fitted to df22; a distinct name (e.g. arima511_ca) would be safer.
arima411 <- Arima(df33, order = c(4, 1, 1), include.drift = TRUE)
arima511 <- Arima(df33, order = c(5, 1, 1), include.drift = TRUE)
AIC(arima411)
## [1] 8307.449
AIC(arima511)
## [1] 8304.877
BIC(arima411)
## [1] 8333.695
BIC(arima511)
## [1] 8334.872
# Let auto.arima search non-seasonal models for comparison.
auto.arima(df33, seasonal = FALSE)
## Series: df33
## ARIMA(4,1,1) with drift
##
## Coefficients:
## ar1 ar2 ar3 ar4 ma1 drift
## 0.956 -0.1363 -0.1455 -0.1848 -0.8778 19618.209
## s.e. 0.060 0.0775 0.0774 0.0584 0.0269 1818.469
##
## sigma^2 estimated as 1.753e+10: log likelihood=-4146.72
## AIC=8307.45 AICc=8307.82 BIC=8333.69
#arima511 has lower AIC and BIC
## SEASONAL ARIMA
# Automatic seasonal model selection; the trace below selects
# ARIMA(0,1,0)(0,1,1)[12] as the best model.
fit33 <- auto.arima(df33, trace = TRUE) # Best model: ARIMA(0,1,0)(0,1,1)[12]
##
## Fitting models using approximations to speed things up...
##
## ARIMA(2,1,2)(1,1,1)[12] : 7018.275
## ARIMA(0,1,0)(0,1,0)[12] : 7106.15
## ARIMA(1,1,0)(1,1,0)[12] : 7044.461
## ARIMA(0,1,1)(0,1,1)[12] : 7009.45
## ARIMA(0,1,1)(0,1,0)[12] : 7107.929
## ARIMA(0,1,1)(1,1,1)[12] : 7019.621
## ARIMA(0,1,1)(0,1,2)[12] : 7011.272
## ARIMA(0,1,1)(1,1,0)[12] : 7045.88
## ARIMA(0,1,1)(1,1,2)[12] : 7010.959
## ARIMA(0,1,0)(0,1,1)[12] : 7007.426
## ARIMA(0,1,0)(1,1,1)[12] : 7017.631
## ARIMA(0,1,0)(0,1,2)[12] : 7009.225
## ARIMA(0,1,0)(1,1,0)[12] : 7044.131
## ARIMA(0,1,0)(1,1,2)[12] : 7008.907
## ARIMA(1,1,0)(0,1,1)[12] : 7011.53
## ARIMA(1,1,1)(0,1,1)[12] : 7010.49
##
## Now re-fitting the best model(s) without approximations...
##
## ARIMA(0,1,0)(0,1,1)[12] : 7289.008
##
## Best model: ARIMA(0,1,0)(0,1,1)[12]
# NOTE(review): the sarima() call below fits ARIMA(0,1,0)(1,1,2)[12], which is
# NOT the model auto.arima selected above — confirm which specification was
# intended before relying on the fit printed below.
sarima(df33,0,1,0,1,1,2,12)
## initial value 10.811485
## iter 2 value 10.689700
## iter 3 value 10.648455
## iter 4 value 10.644581
## iter 5 value 10.642988
## iter 6 value 10.642576
## iter 7 value 10.640678
## iter 8 value 10.638508
## iter 9 value 10.637715
## iter 10 value 10.637566
## iter 11 value 10.637474
## iter 12 value 10.637417
## iter 13 value 10.637138
## iter 14 value 10.637029
## iter 15 value 10.636995
## iter 15 value 10.636995
## final value 10.636995
## converged
## initial value 10.642761
## iter 2 value 10.642536
## iter 3 value 10.642266
## iter 4 value 10.642265
## iter 4 value 10.642265
## iter 4 value 10.642265
## final value 10.642265
## converged
## Warning in sqrt(diag(fitit$var.coef)): NaNs produced
## Warning in sqrt(diag(fitit$var.coef)): NaNs produced
## $fit
##
## Call:
## stats::arima(x = xdata, order = c(p, d, q), seasonal = list(order = c(P, D,
## Q), period = S), include.mean = !no.constant, transform.pars = trans, fixed = fixed,
## optim.control = list(trace = trc, REPORT = 1, reltol = tol))
##
## Coefficients:
## Warning in sqrt(diag(x$var.coef)): NaNs produced
## sar1 sma1 sma2
## -0.6881 0.013 -0.4662
## s.e. NaN NaN NaN
##
## sigma^2 estimated as 1.711e+09: log likelihood = -3642.48, aic = 7292.97
##
## $degrees_of_freedom
## [1] 299
##
## $ttable
## Estimate SE t.value p.value
## sar1 -0.6881 NaN NaN NaN
## sma1 0.0130 NaN NaN NaN
## sma2 -0.4662 NaN NaN NaN
##
## $AIC
## [1] 23.30021
##
## $AICc
## [1] 23.30046
##
## $BIC
## [1] 23.34763
# Forecast 48 months ahead from the selected seasonal model (80%/95% intervals).
pred33 <- forecast(fit33, h = 48)
pred33
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## Apr 2019 18769843 18716639 18823047 18688474 18851212
## May 2019 19174437 19099195 19249679 19059364 19289510
## Jun 2019 19365885 19273733 19458038 19224950 19506821
## Jul 2019 19299169 19192760 19405577 19136431 19461907
## Aug 2019 19259613 19140644 19378581 19077666 19441559
## Sep 2019 19137818 19007495 19268141 18938506 19337130
## Oct 2019 19161719 19020954 19302485 18946437 19377001
## Nov 2019 19145259 18994775 19295744 18915113 19375405
## Dec 2019 19094805 18935192 19254418 18850698 19338912
## Jan 2020 18856984 18688737 19025231 18599673 19114295
## Feb 2020 18916639 18740180 19093098 18646769 19186509
## Mar 2020 18935045 18750740 19119350 18653175 19216915
## Apr 2020 19040188 18842877 19237499 18738427 19341949
## May 2020 19444782 19235271 19654293 19124362 19765201
## Jun 2020 19636230 19415191 19857269 19298181 19974280
## Jul 2020 19569514 19337519 19801508 19214709 19924319
## Aug 2020 19529958 19287502 19772413 19159154 19900762
## Sep 2020 19408163 19155679 19660647 19022022 19794303
## Oct 2020 19432064 19169936 19694193 19031173 19832955
## Nov 2020 19415604 19144173 19687035 19000487 19830721
## Dec 2020 19365150 19084725 19645574 18936278 19794022
## Jan 2021 19127329 18838190 19416468 18685129 19569528
## Feb 2021 19186984 18889386 19484582 18731847 19642120
## Mar 2021 19205390 18899567 19511213 18737674 19673106
## Apr 2021 19310533 18992384 19628681 18823967 19797099
## May 2021 19715127 19385113 20045140 19210414 20219839
## Jun 2021 19906575 19565108 20248043 19384346 20428804
## Jul 2021 19839858 19487310 20192407 19300681 20379035
## Aug 2021 19800303 19437010 20163595 19244695 20355911
## Sep 2021 19678508 19304780 20052235 19106941 20250075
## Oct 2021 19702409 19318530 20086288 19115317 20289501
## Nov 2021 19685949 19292180 20079717 19083732 20288166
## Dec 2021 19635495 19232079 20038911 19018523 20252466
## Jan 2022 19397674 18984836 19810512 18766293 20029055
## Feb 2022 19457329 19035279 19879378 18811859 20102798
## Mar 2022 19475735 19044670 19906799 18816478 20134991
## Apr 2022 19580878 19137223 20024533 18902366 20259389
## May 2022 19985471 19529574 20441369 19288236 20682707
## Jun 2022 20176920 19709100 20644740 19461451 20892390
## Jul 2022 20110203 19630757 20589650 19376953 20843453
## Aug 2022 20070647 19579850 20561445 19320038 20821257
## Sep 2022 19948853 19446961 20450744 19181276 20716429
## Oct 2022 19972754 19460008 20485500 19188577 20756931
## Nov 2022 19956294 19432919 20479669 19155861 20756726
## Dec 2022 19905839 19372047 20439632 19089474 20722205
## Jan 2023 19668019 19124008 20212029 18836026 20500011
## Feb 2023 19727674 19173633 20281714 18880342 20575005
## Mar 2023 19746079 19182188 20309971 18883682 20608477
autoplot(pred33)
## 4. ARCH/GARCH
# Non-seasonal model selection for the GARCH stage. The trace below selects
# ARIMA(4,1,1) with drift.
# NOTE(review): the original annotation said ARIMA(5,1,1), which contradicts
# the printed trace — confirm which model fit4 actually holds.
fit4 <- auto.arima(df33, seasonal = FALSE, trace = TRUE)
##
## Fitting models using approximations to speed things up...
##
## ARIMA(2,1,2) with drift : 8328.876
## ARIMA(0,1,0) with drift : 8423.307
## ARIMA(1,1,0) with drift : 8379.721
## ARIMA(0,1,1) with drift : 8387.116
## ARIMA(0,1,0) : 8425.806
## ARIMA(1,1,2) with drift : 8377.805
## ARIMA(2,1,1) with drift : Inf
## ARIMA(3,1,2) with drift : 8238.029
## ARIMA(3,1,1) with drift : 8295.752
## ARIMA(4,1,2) with drift : 8287.616
## ARIMA(3,1,3) with drift : 8328.485
## ARIMA(2,1,3) with drift : 8329.249
## ARIMA(4,1,1) with drift : 8285.513
## ARIMA(4,1,3) with drift : Inf
## ARIMA(3,1,2) : Inf
##
## Now re-fitting the best model(s) without approximations...
##
## ARIMA(3,1,2) with drift : Inf
## ARIMA(4,1,1) with drift : 8307.815
##
## Best model: ARIMA(4,1,1) with drift
# Compare candidate transformations of the series: difference, log, diff-log.
df33.diff <- diff(df33)
plot(df33.diff, main=" diff data")
df33.log <- log(df33)
plot(df33.log, main=" log data")
df33.difflog <- diff(log(df33))
plot(df33.difflog, main=" difflog data")
acf(df33.difflog, lag.max = 100) # from acf plot, we could observe the seasonality
pacf(df33.difflog, lag.max = 100) # from pacf plot, AR(2) or AR(4)
# ACF/PACF of the squared series: persistence here suggests volatility
# clustering (motivation for an ARCH/GARCH model).
acf2(df33^2)
## ACF PACF
## [1,] 0.99 0.99
## [2,] 0.97 -0.12
## [3,] 0.95 -0.03
## [4,] 0.93 0.02
## [5,] 0.92 0.11
## [6,] 0.91 0.11
## [7,] 0.90 0.05
## [8,] 0.90 0.08
## [9,] 0.89 0.07
## [10,] 0.89 0.05
## [11,] 0.89 0.00
## [12,] 0.88 -0.05
## [13,] 0.87 -0.28
## [14,] 0.85 -0.03
## [15,] 0.84 -0.01
## [16,] 0.82 -0.01
## [17,] 0.81 0.07
## [18,] 0.80 0.04
## [19,] 0.79 0.02
## [20,] 0.78 0.03
## [21,] 0.78 0.05
## [22,] 0.78 0.03
## [23,] 0.78 0.01
## [24,] 0.77 0.00
## [25,] 0.76 -0.19
## [26,] 0.74 -0.02
## [27,] 0.73 0.00
## [28,] 0.71 0.01
## [29,] 0.70 0.02
## [30,] 0.69 0.00
## [31,] 0.68 0.01
## [32,] 0.68 0.03
## [33,] 0.67 0.04
## [34,] 0.67 0.02
## [35,] 0.67 -0.02
## [36,] 0.66 0.00
## [37,] 0.65 -0.15
## [38,] 0.64 0.00
## [39,] 0.62 0.00
## [40,] 0.61 0.01
## [41,] 0.59 0.00
## [42,] 0.58 -0.01
## [43,] 0.58 0.00
## [44,] 0.57 0.01
## [45,] 0.57 0.02
## [46,] 0.56 0.01
## [47,] 0.56 0.00
## [48,] 0.56 -0.01
# Fit ARIMA(5,1,1) (no drift) and examine the SQUARED residuals for
# remaining ARCH effects.
# NOTE(review): package TSA masks stats::arima (see startup messages), so
# this calls TSA's arima — confirm that is intended.
arima511.2 <- arima(df33, order = c(5, 1, 1))
residual_arima511.2 <- arima511.2$residuals
acf2(residual_arima511.2^2)
## ACF PACF
## [1,] -0.19 -0.19
## [2,] -0.30 -0.35
## [3,] -0.05 -0.23
## [4,] 0.20 0.03
## [5,] 0.00 -0.01
## [6,] -0.22 -0.18
## [7,] 0.00 -0.09
## [8,] 0.26 0.14
## [9,] -0.05 0.00
## [10,] -0.28 -0.19
## [11,] -0.18 -0.38
## [12,] 0.78 0.66
## [13,] -0.19 -0.06
## [14,] -0.29 -0.05
## [15,] -0.03 -0.03
## [16,] 0.21 0.06
## [17,] 0.02 0.03
## [18,] -0.22 -0.01
## [19,] -0.01 -0.03
## [20,] 0.23 -0.16
## [21,] -0.03 0.03
## [22,] -0.27 -0.05
## [23,] -0.16 -0.06
## [24,] 0.70 0.14
## [25,] -0.16 0.03
## [26,] -0.26 0.05
## [27,] -0.04 -0.03
## [28,] 0.23 0.12
## [29,] 0.00 -0.10
## [30,] -0.20 0.04
## [31,] -0.01 0.02
## [32,] 0.25 0.07
## [33,] -0.03 -0.05
## [34,] -0.26 -0.03
## [35,] -0.14 0.03
## [36,] 0.65 0.11
## [37,] -0.15 0.06
## [38,] -0.24 0.01
## [39,] -0.02 0.07
## [40,] 0.21 -0.04
## [41,] -0.01 0.04
## [42,] -0.19 0.02
## [43,] -0.02 -0.03
## [44,] 0.21 -0.13
## [45,] -0.03 -0.03
## [46,] -0.26 -0.02
## [47,] -0.14 -0.10
## [48,] 0.62 0.12
# --- GARCH modelling of the ARIMA(5,1,1) residuals ---
# order = c(4, 1): the summary below shows coefficients a0, a1 and b1..b4,
# i.e. one ARCH term and four GARCH terms.
garch_ca1 <- garch(residual_arima511.2, order = c(4, 1), trace = FALSE)
## Warning in garch(residual_arima511.2, order = c(4, 1), trace = F): singular
## information
summary(garch_ca1)
##
## Call:
## garch(x = residual_arima511.2, order = c(4, 1), trace = F)
##
## Model:
## GARCH(4,1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.7583 -0.1879 0.2742 0.8254 3.0769
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 1.350e+10 NA NA NA
## a1 7.244e-05 NA NA NA
## b1 6.394e-02 NA NA NA
## b2 6.652e-02 NA NA NA
## b3 6.925e-02 NA NA NA
## b4 6.698e-02 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 1.2106, df = 2, p-value = 0.5459
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 11.595, df = 1, p-value = 0.0006614
AIC(garch_ca1) # lower AIC than garch_ca2 below -> preferred
## [1] 8266.518
# Simpler comparison fit: order = c(0, 1) gives a0 and a1 only (no GARCH terms).
# Its AIC (8337.5) is HIGHER than garch_ca1's (8266.5).
garch_ca2 <- garch(residual_arima511.2, order = c(0, 1), trace = FALSE)
## Warning in garch(residual_arima511.2, order = c(0, 1), trace = F): singular
## information
summary(garch_ca2)
##
## Call:
## garch(x = residual_arima511.2, order = c(0, 1), trace = F)
##
## Model:
## GARCH(0,1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.8620 -0.1929 0.2840 0.8508 3.1926
##
## Coefficient(s):
## Estimate Std. Error t value Pr(>|t|)
## a0 1.710e+10 NA NA NA
## a1 2.048e-04 NA NA NA
##
## Diagnostic Tests:
## Jarque Bera Test
##
## data: Residuals
## X-squared = 1.439, df = 2, p-value = 0.487
##
##
## Box-Ljung test
##
## data: Squared.Residuals
## X-squared = 11.979, df = 1, p-value = 0.0005381
AIC(garch_ca2)
## [1] 8337.543
# Side-by-side employment plots for the three countries.
# Save and restore the graphics state so the 1x3 layout does not leak into
# any later plotting.
op <- par(mfrow = c(1, 3))
plot(df11, ylab = "Number", main = "United States: Employment")
plot(df22, ylab = "Number", main = "United Kingdom: Employment")
plot(df33, ylab = "Number", main = "Canada: Employment")
par(op)